import glob
import warnings
warnings.filterwarnings('ignore')
warnings.simplefilter('ignore')
import numpy as np
from keras.applications.inception_v3 import InceptionV3, preprocess_input, decode_predictions
from keras.preprocessing.image import load_img, img_to_array
from keras.models import Model, load_model
from keras.layers import Dense, GlobalAveragePooling2D, Dropout
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import TensorBoard
from keras.optimizers import SGD
import matplotlib.pyplot as plt
# Class names in prediction-index order. NOTE(review): flow_from_directory
# assigns class indices by alphabetical subdirectory order — presumably the
# train/val dirs contain 'apple' and 'pear' subfolders; confirm they match.
LABELS = ['apple', 'pear']
# Dataset layout: one subdirectory per class under train/ and val/;
# test/ holds loose images scored by predict_test_images().
TRAIN_DIR = 'images/train'
VAL_DIR = 'images/val'
TEST_DIR = 'images/test'
def plot_history(history, params=('loss', 'acc')):
    """Plot one training-curve figure per metric in *params*.

    For each metric, draws the training curve and — when the history
    recorded a matching ``val_<metric>`` series — the validation curve,
    then shows the figure.

    Parameters
    ----------
    history : object with a ``.history`` dict (Keras ``History``).
    params : iterable of metric-name strings to plot. The default is a
        tuple rather than a list to avoid the shared-mutable-default
        pitfall; callers passing a list still work unchanged.
    """
    for param in params:
        plt.plot(history.history[param], label='train')
        # Validation metrics are only present when validation_data was
        # supplied to fit; skip the curve otherwise.
        if 'val_%s' % param in history.history:
            plt.plot(history.history['val_%s' % param], label='test')
        plt.xlabel('epoch')
        plt.ylabel(param)
        plt.title(param.capitalize())
        plt.legend()
        plt.show()
def predict_image(image_path, model, img_size, preproc_func, decode_func):
    """Show the image at *image_path* titled with the model's top-3 guesses.

    The original-resolution image is displayed; a copy resized to
    *img_size* is preprocessed with *preproc_func*, classified by
    *model*, and the predictions decoded with *decode_func*.
    """
    raw = load_img(image_path)
    plt.figure(figsize=(10, 10))
    plt.xticks([])
    plt.yticks([])
    plt.imshow(raw)
    # Build a single-image batch at the network's expected input size.
    batch = np.expand_dims(img_to_array(raw.resize(img_size)), axis=0)
    decoded = decode_func(model.predict(preproc_func(batch)), top=3)
    # Each decoded entry is (class_id, class_name, probability).
    title = '\n'.join(['%s (%0.2f%%)' % (p[1].capitalize(), 100 * p[2]) for p in decoded[0]])
    plt.title(title)
    plt.show()
def predict(model, img):
    """Classify one PIL image; return ``(label, confidence_percent)``."""
    # Single-image batch, preprocessed the same way as training data.
    batch = preprocess_input(np.expand_dims(img_to_array(img), axis=0))
    probs = model.predict(batch)[0]
    best = np.argmax(probs)
    # probs[best] is the maximum class probability.
    return LABELS[best], 100 * probs[best]
def predict_test_images(model):
    """Classify every image in TEST_DIR and plot them in a labelled grid.

    Images are arranged in a 3-column grid; each cell's title shows the
    predicted label and confidence from ``predict``.
    """
    test_images = glob.glob('%s/*' % TEST_DIR)
    # BUGFIX: np.ceil returns a float, and plt.subplot requires integer
    # grid dimensions (a hard error on modern matplotlib) — cast to int.
    h, w = int(np.ceil(len(test_images) / 3)), 3
    plt.figure(figsize=(5 * w, 5 * h))
    for i, img_file in enumerate(test_images):
        plt.subplot(h, w, i + 1)
        # NOTE(review): relies on module-level HEIGHT/WIDTH, which are
        # defined later in the file — fine at call time, but this
        # function must not run before they exist.
        img = load_img(img_file, target_size=(HEIGHT, WIDTH))
        label, conf = predict(model, img)
        plt.imshow(np.asarray(img))
        title = '%s (%0.2f%%)' % (label, conf)
        plt.title(title)
        plt.xticks([])
        plt.yticks([])
    plt.tight_layout()
def show_most_confident_predictions(generator, model, h=5, w=5, reverse=False):
    """Plot the ``h*w`` images the model is most (or least) sure about.

    Drains one full pass of *generator*, predicts class probabilities
    for every image, ranks images by their top class probability, and
    plots the ``h*w`` highest-confidence images (descending). With
    ``reverse=True``, plots the lowest-confidence images instead.
    """
    batches = []
    for _ in range(len(generator)):
        x_batch, _ = next(generator)
        batches.append(x_batch)
    ds = np.vstack(batches)
    n_img = min(h * w, ds.shape[0])
    preds = model.predict(ds, verbose=0)
    # Confidence = probability of the predicted (argmax) class.
    order = np.argsort(np.max(preds, axis=1))  # ascending
    if reverse:
        # Least confident first.
        top_conf = order[:n_img]
    else:
        # BUGFIX: the original slice order[-n_img::-1] walked backwards
        # from the n_img-th largest down to the *smallest*, selecting
        # the wrong images entirely. Take the n_img largest, shown in
        # descending confidence.
        top_conf = order[-n_img:][::-1]
    x = ds[top_conf]
    preds = [preds[i] for i in top_conf]
    plt.figure(figsize=(3 * w, 3 * h))
    for i in range(n_img):
        plt.subplot(h, w, i + 1)
        # Assumes generator images are 150x150x3 and preprocessed into
        # [-1, 1] (InceptionV3 preprocess_input) — the +1)/2 maps back
        # to [0, 1] for display. TODO confirm if target_size changes.
        image = x[i].reshape(150, 150, 3)
        plt.imshow((image + 1) / 2)
        title = '%s\n%0.2f%%' % (LABELS[np.argmax(preds[i])], 100 * np.max(preds[i]))
        plt.title(title)
        plt.xticks([])
        plt.yticks([])
    plt.tight_layout()
# Sanity check: run the stock ImageNet classifier (full top included)
# on two of our test images before any transfer learning.
incv3 = InceptionV3(weights='imagenet', include_top=True)
for test_file in ('images/test/test_0002.jpg', 'images/test/test_0006.jpg'):
    predict_image(test_file,
                  incv3, (299, 299),
                  preprocess_input,
                  decode_predictions)
# Transfer learning: keep the pretrained InceptionV3 convolutional base
# (include_top=False) and bolt on a fresh classification head sized for
# our label set.
base_model = InceptionV3(weights='imagenet', include_top=False)

head = GlobalAveragePooling2D(name='avg_pool')(base_model.output)
head = Dropout(0.4)(head)
head = Dense(1024, activation='relu')(head)
head = Dropout(0.4)(head)
head = Dense(256, activation='relu')(head)
head = Dropout(0.4)(head)
predictions = Dense(len(LABELS), activation='softmax')(head)

model = Model(inputs=base_model.input, outputs=predictions)

# Phase 1: train only the new head — freeze every pretrained layer.
for layer in base_model.layers:
    layer.trainable = False

model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# Network input resolution and mini-batch size for training/validation.
WIDTH = 150
HEIGHT = 150
BATCH_SIZE = 16
# Training data gets heavy augmentation on top of the InceptionV3
# preprocessing; validation data gets only the preprocessing.
img_gen_train = ImageDataGenerator(
preprocessing_function=preprocess_input,
rotation_range=40,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True,
fill_mode='nearest')
img_gen_validation = ImageDataGenerator(
preprocessing_function=preprocess_input)
# Directory iterators: one subfolder per class, one-hot labels
# (class_mode='categorical').
train_generator = img_gen_train.flow_from_directory(
TRAIN_DIR,
target_size=(HEIGHT, WIDTH),
batch_size=BATCH_SIZE,
class_mode='categorical')
validation_generator = img_gen_validation.flow_from_directory(
VAL_DIR,
target_size=(HEIGHT, WIDTH),
batch_size=BATCH_SIZE,
class_mode='categorical')
# Visual sanity check: display one augmented training batch with its
# labels (generator output is in [-1, 1]; map back to [0, 1] to show).
x_batch, y_batch = next(train_generator)
rows, cols = 4, 8
plt.figure(figsize=(12, 9))
for idx, (sample, probs) in enumerate(zip(x_batch, y_batch)):
    plt.subplot(rows, cols, idx + 1)
    plt.imshow((sample + 1) / 2)
    plt.title(LABELS[np.argmax(probs)])
    plt.axis('off')
plt.tight_layout()
plt.show()
# --- Phase 1: fit only the new classification head ---------------------
EPOCHS = 10
tb = TensorBoard(log_dir="logs", write_graph=True, write_images=True)
# NOTE(review): validation_steps is hard-coded to 20 here, while the
# fine-tuning phase later uses len(validation_generator) — presumably
# intentional (faster per-epoch validation), but worth confirming.
history = model.fit_generator(
train_generator,
epochs=EPOCHS,
validation_data=validation_generator,
callbacks=[tb],
steps_per_epoch=len(train_generator),
validation_steps=20)
# Persist the head-only model, then reload it so the rest of the script
# runs from the on-disk checkpoint.
model.save('bn_head.hdf5')
plot_history(history)
model = load_model('bn_head.hdf5')
# evaluate_generator returns [loss, accuracy] (per compile metrics).
score = model.evaluate_generator(validation_generator,
steps=len(validation_generator))
acc = 100 * score[1]
print('Model val accuracy: %0.3f%%' % acc)
predict_test_images(model)
# --- Phase 2: fine-tune the top of the network -------------------------
tb = TensorBoard(log_dir="logs", write_graph=True, write_images=True)
# Unfreeze layers from index 249 up (the upper inception blocks plus the
# new head); keep everything below frozen. NOTE(review): 249 is the cut
# point commonly used in the Keras InceptionV3 fine-tuning example —
# confirm it still matches this model's layer indexing.
for layer in model.layers[:249]:
    layer.trainable = False
for layer in model.layers[249:]:
    layer.trainable = True
# Recompile so the trainability changes take effect; low-LR SGD with
# momentum avoids destroying the pretrained weights.
model.compile(optimizer=SGD(lr=1e-4, momentum=0.9),
loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit_generator(
train_generator,
steps_per_epoch=2 * len(train_generator),
epochs=EPOCHS,
validation_data=validation_generator,
validation_steps=len(validation_generator),
callbacks=[tb])
# Save, reload, and re-evaluate the fine-tuned model.
model.save('bn_unfreezed_head.hdf5')
plot_history(history)
model = load_model('bn_unfreezed_head.hdf5')
score = model.evaluate_generator(validation_generator,
steps=len(validation_generator))
acc = 100 * score[1]
print('Model val accuracy: %0.3f%%' % acc)
predict_test_images(model)
# Inspect the validation images the model is most / least sure about.
show_most_confident_predictions(validation_generator, model, h=2, w=6)
show_most_confident_predictions(validation_generator, model, h=2, w=6, reverse=True)